network.asc, from dev/amos/neural.lha (EnigmA AMIGA RUN 17, 1997): text file, 1993-02-25, 4KB, 196 lines
'Amos 3 layer Neural Network V1.0
'(C)1996 Lee Atkins.
'
'Only open a screen if you need to see anything.
'
Screen Open 0,640,200,2,Hires
Hide : Curs Off : Flash Off : Cls 0
'
'ACC# is acceptable average squared error.
ACC#=0.008
'LAYERS is number of layers in network (including fanout layer).
'Don't change yet!
LAYERS=3
'HIDDEN is number of neurons in each hidden layer.
HIDDEN=4
'IPUT is number of inputs to network.
IPUT=2
'OUTPUT is number of output neurons.
OUTPUT=1
'RATE# is the learning rate of the network.
'Bigger is not always better: too large a rate overshoots and oscillates.
RATE#=4
'PATTERNS is the total number of training patterns to use.
PATTERNS=4
'ITERATIONS is the maximum number of times the entire training set is
'to be presented to the network.
ITERATIONS=640
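'
'A worked check of the network size with these settings: the layout is
'2-4-1 (IPUT-HIDDEN-OUTPUT), giving (IPUT+1)*HIDDEN=12 weights into the
'hidden layer and (HIDDEN+1)*OUTPUT=5 into the output layer (the +1 in
'each case is the bias), 17 trainable weights in all.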
'
'
'
'H# holds the hidden layer activations, F# the output activations.
'X holds the training inputs, R the expected (target) outputs.
Dim H#(HIDDEN)
Dim F#(OUTPUT)
Dim X(PATTERNS,IPUT)
Dim R(PATTERNS,OUTPUT)
'
'Setup current training set.
'This is XNOR
'X(n,o) is input where n is pattern number and o is input number.
'R(p,q) is expected output where p is pattern and q is output number.
'
X(1,1)=0 : X(1,2)=0 : R(1,1)=1
X(2,1)=0 : X(2,2)=1 : R(2,1)=0
X(3,1)=1 : X(3,2)=0 : R(3,1)=0
X(4,1)=1 : X(4,2)=1 : R(4,1)=1
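'
'XNOR, like XOR, is not linearly separable: no single weighted sum of
'the two inputs can split {00,11} from {01,10}. That is why the hidden
'layer is needed at all.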
'
'Z is the width of the widest layer, used to size the shared arrays.
Z=OUTPUT
If HIDDEN>OUTPUT Then Z=HIDDEN
'W#(L,N,I): L=1 is input to hidden, L=0 is hidden to output; N is the
'neuron and I is the source index, with I=0 the bias weight.
Dim ERR#(LAYERS-2,Z+1)
Dim W#(LAYERS-2,Z+1,Z+1)
'
'Initialise the network weights. Indices 0 to Z-1 start at -2; AMOS
'zero-fills Dim'd arrays, so any index beyond that starts at 0.
'
For L=0 To LAYERS-2
For N=1 To Z
For W=0 To Z-1
W#(L,N,W)=-2
Next W
Next N
Next L
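'
'Note that neurons whose incoming and outgoing weights all start equal
'receive equal updates and never differentiate. A common alternative
'(a sketch only, not part of the original program) is small random
'starting weights, e.g.:
'
'For L=0 To LAYERS-2
'For N=1 To Z
'For W=0 To Z
'W#(L,N,W)=(Rnd(1000)/1000.0)-0.5
'Next W
'Next N
'Next L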
'
'ITT counts passes; MER# starts above ACC# so the loop is entered.
ITT=0
MER#=1
'
'Train the network.
'
While ITT<=ITERATIONS and MER#>ACC#
For PAT=1 To PATTERNS
'
'Forward propagation
'
For J=1 To HIDDEN
A#=W#(1,J,0)
For I=1 To IPUT
A#=A#+X(PAT,I)*W#(1,J,I)
Next I
H#(J)=1/(1+Exp(-A#))
Next J
'
For J=1 To OUTPUT
A#=W#(0,J,0)
For I=1 To HIDDEN
A#=A#+H#(I)*W#(0,J,I)
Next I
F#(J)=1/(1+Exp(-A#))
Next J
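'
'Each neuron squashes its weighted sum through the logistic (sigmoid)
'function: out=1/(1+Exp(-a)) where a=w0+w1*in1+w2*in2+... and w0 is
'the bias weight, so every activation lies strictly between 0 and 1.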
'
'Compute error for each output neuron
'
For J=1 To OUTPUT
ERR#(0,J)=(R(PAT,J)-F#(J))*(F#(J)*(1-F#(J)))
Next J
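'
'This is the delta rule for a sigmoid output: the derivative of
'1/(1+Exp(-a)) with respect to a is f*(1-f), so the error signal is
'delta=(target-output)*output*(1-output).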
'
'Adjust weights, hidden layer to output layer.
'
For J=1 To OUTPUT
W#(0,J,0)=W#(0,J,0)+RATE#*ERR#(0,J)
For I=1 To HIDDEN
W#(0,J,I)=W#(0,J,I)+RATE#*ERR#(0,J)*H#(I)
Next I
Next J
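'
'Gradient step: each weight moves by RATE#*delta*(its input), and the
'bias weight at index 0 moves by RATE#*delta, as if its input were
'fixed at 1.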
'
'Compute error for each hidden neuron.
'
For J=1 To HIDDEN
A#=0
For K=1 To OUTPUT
A#=A#+ERR#(0,K)*W#(0,K,J)
Next K
ERR#(1,J)=(H#(J)*(1-H#(J)))*A#
Next J
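'
'Backpropagation proper: a hidden neuron's error is its own sigmoid
'derivative H#(J)*(1-H#(J)) times the sum of the output errors
'weighted by the connections leaving it.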
'
'Adjust weights from input layer to hidden layer.
'
For J=1 To HIDDEN
W#(1,J,0)=W#(1,J,0)+RATE#*ERR#(1,J)
For I=1 To IPUT
W#(1,J,I)=W#(1,J,I)+RATE#*ERR#(1,J)*X(PAT,I)
Next I
Next J
Next PAT
'
'
'Evaluate average squared error
'
MER#=0
For W=1 To PATTERNS
For H=1 To HIDDEN
A#=W#(1,H,0)
For Q=1 To IPUT
A#=A#+W#(1,H,Q)*X(W,Q)
Next Q
H#(H)=1/(1+Exp(-A#))
Next H
For H=1 To OUTPUT
A#=W#(0,H,0)
For Q=1 To HIDDEN
A#=A#+W#(0,H,Q)*H#(Q)
Next Q
F#(H)=1/(1+Exp(-A#))
Next H
For I=1 To OUTPUT
MER#=MER#+(R(W,I)-F#(I))^2
Next I
Next W
'Average once, over every pattern and every output.
MER#=MER#/(PATTERNS*OUTPUT)
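'
'A worked example of the stopping test: with ACC#=0.008 and a single
'output, training ends once the average of (target-output)^2 over the
'4 patterns drops below 0.008, i.e. each output is typically within
'about Sqr(0.008)=0.09 of its target.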
'
'The plot below lets you watch training progress (error vs iteration).
'
'Print PAT;" ";ITT;" ";MER#
Plot ITT*(640/ITERATIONS),200-MER#*400
'
'
ITT=ITT+1
Wend
'
'Training complete!
'
'
'Print out the operating network
'
For W=1 To PATTERNS
Print
'Print the inputs for this pattern.
For Q=1 To IPUT
Print X(W,Q);" ";
Next Q
For H=1 To HIDDEN
A#=W#(1,H,0)
For Q=1 To IPUT
A#=A#+W#(1,H,Q)*X(W,Q)
Next Q
H#(H)=1/(1+Exp(-A#))
Next H
For H=1 To OUTPUT
A#=W#(0,H,0)
For Q=1 To HIDDEN
A#=A#+W#(0,H,Q)*H#(Q)
Next Q
F#(H)=1/(1+Exp(-A#))
Print F#(H);" ";
Next H
Next W
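'
'If training converged, the four printed lines should approach the
'XNOR targets: inputs 0 0 and 1 1 give an output near 1, inputs 0 1
'and 1 0 give an output near 0.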
Wait Key
'
'The following is not needed if this runs inside another program. It is
'better to save the weights for later use; that avoids training again.
'
'Print out weights
'
Print : Print "Weights:"
For L=0 To LAYERS-2
For N=1 To Z
'Go up to Z so the output layer's weight from the last hidden neuron
'is included.
For W=0 To Z
Print L;" ";N;" ";W;" ";W#(L,N,W)
Next W
Next N
Next L
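'
'A minimal sketch of saving and reloading the trained weights, as the
'comment above suggests ("weights.dat" is a made-up filename, and AMOS
'sequential file commands are assumed):
'
'Open Out 1,"weights.dat"
'For L=0 To LAYERS-2
'For N=1 To Z
'For W=0 To Z
'Print #1,W#(L,N,W)
'Next W
'Next N
'Next L
'Close 1
'
'To load them back later, replace the training loop with:
'
'Open In 1,"weights.dat"
'For L=0 To LAYERS-2
'For N=1 To Z
'For W=0 To Z
'Input #1,W#(L,N,W)
'Next W
'Next N
'Next L
'Close 1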